import os
import cv2
import numpy as np

x = []  # Feature dataset with images
y = []  # Target dataset with labels
folder_dir = './Flower-Data_CNN'
size = 128  # Resize every image to 128x128 pixels

for folder in os.listdir(folder_dir):
    for file in os.listdir(os.path.join(folder_dir, folder)):
        if file.endswith("JPG"):
            y.append(folder)
            img = cv2.imread(os.path.join(folder_dir, folder, file))
            img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # OpenCV loads BGR; convert to RGB
            im = cv2.resize(img_rgb, (size, size))
            x.append(im)
        else:
            continue
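# Quick sanity check (a minimal sketch, assuming the folder-per-class layout above):
# count how many images were loaded for each class to spot any imbalance.
from collections import Counter
print(f"Loaded {len(x)} images across {len(set(y))} classes")
for label, count in sorted(Counter(y).items()):
    print(f"{label}: {count}")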
# split the dataset into 80% training and 20% test sets:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.20, random_state=10)
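# With 30 classes, a purely random split can leave some of them under-represented
# in the test set. An alternative sketch (not the split used for the results below)
# that stratifies on the labels with the same API:
# X_train, X_test, y_train, y_test = train_test_split(
#     x, y, test_size=0.20, random_state=10, stratify=y)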
# take a look at one of the images:
X_train[0]
array([[[176, 193, 190],
[178, 193, 192],
[153, 175, 169],
...,
[ 5, 5, 5],
[ 10, 15, 12],
[ 21, 32, 31]],
[[168, 189, 183],
[158, 185, 181],
[142, 172, 167],
...,
[ 5, 9, 7],
[ 19, 28, 24],
[ 19, 31, 29]],
[[135, 167, 158],
[108, 145, 137],
[ 94, 127, 117],
...,
[ 16, 25, 25],
[ 17, 28, 27],
[ 23, 36, 34]],
...,
[[ 67, 101, 129],
[ 67, 104, 133],
[ 67, 106, 137],
...,
[ 23, 35, 44],
[ 47, 72, 79],
[ 81, 112, 124]],
[[ 69, 106, 135],
[ 64, 101, 131],
[ 66, 101, 130],
...,
[108, 134, 144],
[ 59, 96, 100],
[ 88, 120, 128]],
[[ 17, 33, 41],
[ 19, 31, 36],
[ 24, 36, 40],
...,
[ 89, 129, 146],
[ 65, 99, 104],
[ 77, 106, 108]]], dtype=uint8)
# Show the image
import matplotlib.pyplot as plt
img = plt.imshow(X_train[0])
# Print the label of the image above:
y_train[0]
'Southern Marsh Orchid'
# Convert the string labels into numerical values.
# Fit the encoder once on the training labels and reuse the same mapping for the
# test labels, so both sets share an identical class-to-integer encoding.
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.utils import to_categorical
le = LabelEncoder()
y_train = le.fit_transform(y_train)
y_test = le.transform(y_test)
y_train
array([24, 3, 1, ..., 5, 20, 5], dtype=int64)
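# The integer labels map back to flower names through the fitted encoder; a
# minimal sketch using the `le` object from the cell above:
print(le.classes_)                 # class names in encoded (alphabetical) order
print(le.inverse_transform([24]))  # ['Southern Marsh Orchid'], matching y_train[0] above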
import tensorflow as tf
y_train_one_hot = tf.one_hot(y_train, depth=30)
y_test_one_hot = tf.one_hot(y_test, depth=30)
y_train_one_hot
<tf.Tensor: shape=(5346, 30), dtype=float32, numpy=
array([[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.],
[0., 1., 0., ..., 0., 0., 0.],
...,
[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.],
[0., 0., 0., ..., 0., 0., 0.]], dtype=float32)>
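# The Keras helper imported earlier produces the same 0/1 matrix as tf.one_hot,
# just as a NumPy array; a minimal equivalent sketch (y_train_cat is a new,
# illustrative name):
y_train_cat = to_categorical(y_train, num_classes=30)
print(y_train_cat.shape)  # (5346, 30)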
# Normalize the pixels in the images to be values between 0 and 1
X_train = np.array(X_train)/255
X_test= np.array(X_test)/255
X_train
array([[[[0.69019608, 0.75686275, 0.74509804],
[0.69803922, 0.75686275, 0.75294118],
[0.6 , 0.68627451, 0.6627451 ],
...,
[0.01960784, 0.01960784, 0.01960784],
[0.03921569, 0.05882353, 0.04705882],
[0.08235294, 0.1254902 , 0.12156863]],
[[0.65882353, 0.74117647, 0.71764706],
[0.61960784, 0.7254902 , 0.70980392],
[0.55686275, 0.6745098 , 0.65490196],
...,
[0.01960784, 0.03529412, 0.02745098],
[0.0745098 , 0.10980392, 0.09411765],
[0.0745098 , 0.12156863, 0.11372549]],
[[0.52941176, 0.65490196, 0.61960784],
[0.42352941, 0.56862745, 0.5372549 ],
[0.36862745, 0.49803922, 0.45882353],
...,
[0.0627451 , 0.09803922, 0.09803922],
[0.06666667, 0.10980392, 0.10588235],
[0.09019608, 0.14117647, 0.13333333]],
...,
[[0.2627451 , 0.39607843, 0.50588235],
[0.2627451 , 0.40784314, 0.52156863],
[0.2627451 , 0.41568627, 0.5372549 ],
...,
[0.09019608, 0.1372549 , 0.17254902],
[0.18431373, 0.28235294, 0.30980392],
[0.31764706, 0.43921569, 0.48627451]],
[[0.27058824, 0.41568627, 0.52941176],
[0.25098039, 0.39607843, 0.51372549],
[0.25882353, 0.39607843, 0.50980392],
...,
[0.42352941, 0.5254902 , 0.56470588],
[0.23137255, 0.37647059, 0.39215686],
[0.34509804, 0.47058824, 0.50196078]],
[[0.06666667, 0.12941176, 0.16078431],
[0.0745098 , 0.12156863, 0.14117647],
[0.09411765, 0.14117647, 0.15686275],
...,
[0.34901961, 0.50588235, 0.57254902],
[0.25490196, 0.38823529, 0.40784314],
[0.30196078, 0.41568627, 0.42352941]]],
[[[0.38431373, 0.45490196, 0.1254902 ],
[0.37254902, 0.44313725, 0.12156863],
[0.36862745, 0.43529412, 0.11764706],
...,
[0.45490196, 0.50588235, 0.23137255],
[0.51764706, 0.56862745, 0.29411765],
[0.56078431, 0.61176471, 0.34117647]],
[[0.38039216, 0.45098039, 0.12156863],
[0.37647059, 0.44705882, 0.12156863],
[0.36470588, 0.43529412, 0.11372549],
...,
[0.53333333, 0.58431373, 0.30980392],
[0.55686275, 0.60784314, 0.3372549 ],
[0.48235294, 0.52941176, 0.26666667]],
[[0.40784314, 0.48235294, 0.14901961],
[0.37254902, 0.44705882, 0.11764706],
[0.37254902, 0.44705882, 0.12156863],
...,
[0.55294118, 0.60392157, 0.33333333],
[0.50980392, 0.55686275, 0.30588235],
[0.41960784, 0.4627451 , 0.22352941]],
...,
[[0.36862745, 0.40784314, 0.20784314],
[0.34901961, 0.38039216, 0.2 ],
[0.32941176, 0.34901961, 0.2 ],
...,
[0.31372549, 0.37254902, 0.14117647],
[0.30980392, 0.36862745, 0.14509804],
[0.29803922, 0.35294118, 0.14117647]],
[[0.33333333, 0.36470588, 0.2 ],
[0.31372549, 0.3372549 , 0.18823529],
[0.29411765, 0.31372549, 0.18823529],
...,
[0.31372549, 0.37254902, 0.14901961],
[0.30196078, 0.36078431, 0.14509804],
[0.28627451, 0.34117647, 0.13333333]],
[[0.30588235, 0.3372549 , 0.18431373],
[0.28627451, 0.31372549, 0.17647059],
[0.25882353, 0.27843137, 0.16862745],
...,
[0.30196078, 0.36862745, 0.14901961],
[0.27843137, 0.34901961, 0.13333333],
[0.27058824, 0.3372549 , 0.1254902 ]]],
[[[0.39215686, 0.52941176, 0.27058824],
[0.36470588, 0.50196078, 0.24705882],
[0.34509804, 0.48235294, 0.23529412],
...,
[0.65882353, 0.74509804, 0.60784314],
[0.64313725, 0.74509804, 0.56470588],
[0.63529412, 0.74509804, 0.63921569]],
[[0.43921569, 0.57647059, 0.30980392],
[0.41960784, 0.55686275, 0.29019608],
[0.38823529, 0.5254902 , 0.25882353],
...,
[0.64313725, 0.74117647, 0.59215686],
[0.64313725, 0.74509804, 0.58823529],
[0.62745098, 0.7254902 , 0.64705882]],
[[0.45882353, 0.59215686, 0.35294118],
[0.44313725, 0.58039216, 0.33333333],
[0.43921569, 0.57647059, 0.30980392],
...,
[0.63529412, 0.74117647, 0.58823529],
[0.63529412, 0.74117647, 0.60784314],
[0.62352941, 0.70980392, 0.63921569]],
...,
[[0.33333333, 0.41568627, 0.27843137],
[0.41176471, 0.50196078, 0.35294118],
[0.42745098, 0.5254902 , 0.35686275],
...,
[0.37647059, 0.49411765, 0.26666667],
[0.40784314, 0.51764706, 0.29411765],
[0.40392157, 0.5254902 , 0.29411765]],
[[0.36470588, 0.44705882, 0.30588235],
[0.42352941, 0.51372549, 0.36078431],
[0.42745098, 0.52941176, 0.34901961],
...,
[0.37647059, 0.50980392, 0.2745098 ],
[0.35686275, 0.49803922, 0.2627451 ],
[0.36862745, 0.51372549, 0.2745098 ]],
[[0.38039216, 0.47843137, 0.3254902 ],
[0.42352941, 0.5254902 , 0.36470588],
[0.41960784, 0.52941176, 0.35686275],
...,
[0.39215686, 0.49019608, 0.30196078],
[0.39607843, 0.49803922, 0.30980392],
[0.38039216, 0.50588235, 0.2745098 ]]],
...,
[[[0.74117647, 0.77647059, 0.60784314],
[0.58823529, 0.62352941, 0.43529412],
[0.75686275, 0.54117647, 0.36078431],
...,
[0.78431373, 0.08627451, 0.04705882],
[0.80392157, 0.12941176, 0.09019608],
[0.83529412, 0.18431373, 0.12941176]],
[[0.76078431, 0.79215686, 0.62352941],
[0.49803922, 0.58431373, 0.31764706],
[0.61176471, 0.6 , 0.3372549 ],
...,
[0.76862745, 0.04705882, 0.01568627],
[0.79215686, 0.09019608, 0.05098039],
[0.82352941, 0.15294118, 0.09803922]],
[[0.74117647, 0.76862745, 0.62352941],
[0.48235294, 0.55294118, 0.28235294],
[0.58039216, 0.65098039, 0.37254902],
...,
[0.78039216, 0.02745098, 0.01568627],
[0.78823529, 0.05490196, 0.03921569],
[0.81960784, 0.11372549, 0.0745098 ]],
...,
[[0.56470588, 0.63921569, 0.43137255],
[0.67843137, 0.75294118, 0.58039216],
[0.55686275, 0.65490196, 0.45490196],
...,
[0.85490196, 0.0745098 , 0.05882353],
[0.85098039, 0.06666667, 0.05490196],
[0.84705882, 0.05490196, 0.03921569]],
[[0.51764706, 0.59607843, 0.37647059],
[0.57647059, 0.67058824, 0.43921569],
[0.68235294, 0.76078431, 0.59215686],
...,
[0.83921569, 0.04705882, 0.03921569],
[0.84313725, 0.04313725, 0.04705882],
[0.83529412, 0.03529412, 0.03137255]],
[[0.4745098 , 0.55294118, 0.3372549 ],
[0.50980392, 0.60392157, 0.3372549 ],
[0.61960784, 0.69411765, 0.45882353],
...,
[0.81568627, 0.01568627, 0.01960784],
[0.83921569, 0.03921569, 0.04313725],
[0.83529412, 0.03921569, 0.03529412]]],
[[[0.49803922, 0.63137255, 0.25098039],
[0.49019608, 0.62352941, 0.24313725],
[0.49411765, 0.59215686, 0.23921569],
...,
[0.23529412, 0.23921569, 0.08235294],
[0.29019608, 0.3254902 , 0.14509804],
[0.32941176, 0.42352941, 0.13333333]],
[[0.50980392, 0.63529412, 0.25490196],
[0.49803922, 0.63137255, 0.25882353],
[0.47058824, 0.56862745, 0.24313725],
...,
[0.23529412, 0.21960784, 0.0745098 ],
[0.28627451, 0.34117647, 0.13333333],
[0.38039216, 0.48235294, 0.18431373]],
[[0.50588235, 0.62352941, 0.25490196],
[0.49803922, 0.63137255, 0.25490196],
[0.47843137, 0.59215686, 0.25098039],
...,
[0.2627451 , 0.25098039, 0.10196078],
[0.30980392, 0.36862745, 0.17647059],
[0.4 , 0.50196078, 0.22352941]],
...,
[[0.38039216, 0.45098039, 0.20784314],
[0.31764706, 0.43529412, 0.16470588],
[0.21176471, 0.29019608, 0.08627451],
...,
[0.23921569, 0.32941176, 0.05882353],
[0.22745098, 0.3372549 , 0.05098039],
[0.20392157, 0.31764706, 0.05882353]],
[[0.41568627, 0.49411765, 0.22745098],
[0.65098039, 0.72941176, 0.49803922],
[0.73333333, 0.80392157, 0.61960784],
...,
[0.25098039, 0.33333333, 0.07058824],
[0.27058824, 0.39607843, 0.05098039],
[0.24705882, 0.36862745, 0.07058824]],
[[0.71372549, 0.79215686, 0.6 ],
[0.72156863, 0.80784314, 0.61568627],
[0.70980392, 0.79607843, 0.6 ],
...,
[0.29019608, 0.3372549 , 0.14117647],
[0.27843137, 0.35294118, 0.08627451],
[0.22352941, 0.29019608, 0.0627451 ]]],
[[[0.50980392, 0.62745098, 0.36078431],
[0.49019608, 0.61960784, 0.36470588],
[0.47843137, 0.6 , 0.35686275],
...,
[0.30588235, 0.38431373, 0.2627451 ],
[0.20392157, 0.24313725, 0.1372549 ],
[0.38039216, 0.4627451 , 0.25882353]],
[[0.49411765, 0.63529412, 0.37647059],
[0.50588235, 0.62745098, 0.39607843],
[0.53333333, 0.65098039, 0.41960784],
...,
[0.25490196, 0.3372549 , 0.19607843],
[0.27843137, 0.31372549, 0.21176471],
[0.40392157, 0.4745098 , 0.29411765]],
[[0.55686275, 0.6627451 , 0.49803922],
[0.54117647, 0.65882353, 0.50588235],
[0.6 , 0.70588235, 0.56862745],
...,
[0.29019608, 0.39215686, 0.24313725],
[0.34509804, 0.39215686, 0.25490196],
[0.45490196, 0.52156863, 0.34117647]],
...,
[[0.30980392, 0.46666667, 0.1254902 ],
[0.35686275, 0.43529412, 0.21568627],
[0.1254902 , 0.20784314, 0.03529412],
...,
[0.67843137, 0.75294118, 0.61176471],
[0.65098039, 0.72156863, 0.61176471],
[0.59607843, 0.69411765, 0.57647059]],
[[0.54117647, 0.61960784, 0.34509804],
[0.35686275, 0.42352941, 0.21568627],
[0.39215686, 0.42745098, 0.27058824],
...,
[0.19215686, 0.21176471, 0.10196078],
[0.20784314, 0.21960784, 0.09803922],
[0.20392157, 0.23529412, 0.10588235]],
[[0.74901961, 0.82352941, 0.59607843],
[0.45098039, 0.5372549 , 0.29019608],
[0.58431373, 0.64313725, 0.42352941],
...,
[0.44313725, 0.46666667, 0.30588235],
[0.6 , 0.62745098, 0.46666667],
[0.70588235, 0.74117647, 0.59607843]]]])
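# Quick shape check: both splits should now be float arrays of shape
# (n_images, 128, 128, 3) with values in [0, 1].
print(X_train.shape, X_test.shape)
print(X_train.min(), X_train.max())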
# TensorFlow Keras CNN model
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten, Activation, Conv2D, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.optimizers import Adam, SGD, Adagrad, Adadelta, RMSprop
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, Callback, ReduceLROnPlateau
checkpoint = ModelCheckpoint(
    'model.h5',
    monitor='val_loss',
    verbose=1,
    save_best_only=True)

# Note: with min_lr above the optimizer's initial learning rate (0.0001, set
# below), this callback never actually lowers the rate; min_lr would need to be
# smaller (e.g. 1e-6) for the reduction to take effect.
reduce_lr = ReduceLROnPlateau(
    monitor='val_loss',
    factor=0.2,
    verbose=1,
    patience=5,
    min_lr=0.001)

# simple early stopping
# es = EarlyStopping(monitor='val_loss', patience=10, verbose=1)
model = Sequential()
model.add(Conv2D(filters = 64, kernel_size = (3,3),padding = 'Same',activation ='relu', input_shape = (size,size,3)))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same',activation ='relu'))
model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same',activation ='relu'))
model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same',activation ='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Flatten())
#model.add(Dense(256, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dropout(rate=0.5))
model.add(Dense(30, activation = "softmax"))
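# Before compiling, model.summary() is a quick way to confirm the layer output
# shapes and see how many parameters the Flatten -> Dense layers introduce:
model.summary()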
# Use the Adam optimizer with a small learning rate
opt = tf.keras.optimizers.Adam(learning_rate=0.0001)
# Compile with categorical cross-entropy, matching the 30-class softmax output
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
batch_size= 32 #64
epochs= 50
history = model.fit(X_train, y_train_one_hot,
                    batch_size=batch_size,
                    epochs=epochs,
                    validation_split=0.3,
                    verbose=1,
                    callbacks=[reduce_lr, checkpoint])
Epoch 1/50
117/117 [==============================] - 168s 1s/step - loss: 3.3682 - accuracy: 0.0462 - val_loss: 3.2303 - val_accuracy: 0.0854
Epoch 00001: val_loss improved from inf to 3.23028, saving model to model.h5
Epoch 2/50
117/117 [==============================] - 173s 1s/step - loss: 3.1940 - accuracy: 0.0965 - val_loss: 2.9239 - val_accuracy: 0.1646
Epoch 00002: val_loss improved from 3.23028 to 2.92390, saving model to model.h5
Epoch 3/50
117/117 [==============================] - 174s 1s/step - loss: 2.9039 - accuracy: 0.1558 - val_loss: 2.5521 - val_accuracy: 0.2706
Epoch 00003: val_loss improved from 2.92390 to 2.55210, saving model to model.h5
...
Epoch 31/50
117/117 [==============================] - 175s 1s/step - loss: 0.6149 - accuracy: 0.7798 - val_loss: 0.8153 - val_accuracy: 0.7675
Epoch 00031: val_loss improved from 0.81992 to 0.81528, saving model to model.h5
...
Epoch 49/50
117/117 [==============================] - 194s 2s/step - loss: 0.4286 - accuracy: 0.8437 - val_loss: 0.8481 - val_accuracy: 0.7905
Epoch 00049: val_loss did not improve from 0.81528
Epoch 50/50
117/117 [==============================] - 198s 2s/step - loss: 0.4317 - accuracy: 0.8386 - val_loss: 1.1864 - val_accuracy: 0.7693
Epoch 00050: val_loss did not improve from 0.81528
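# The checkpoint callback saved the weights with the lowest validation loss
# (0.81528 at epoch 31) to model.h5, while `model` now holds the final-epoch
# weights. A minimal sketch that reloads the best checkpoint and scores it on
# the held-out test set:
from tensorflow.keras.models import load_model

best_model = load_model('model.h5')
test_loss, test_acc = best_model.evaluate(X_test, y_test_one_hot, verbose=0)
print(f"Test loss: {test_loss:.4f}  Test accuracy: {test_acc:.4f}")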
#accuracy
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('Model Accuracy')
plt.xlabel("Epoch")
plt.ylabel("Accuracy")
plt.legend(['Train','Val'],loc='upper left')
plt.show()
#Loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model Loss')
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.legend(['Train','Val'],loc='upper left')
plt.show()
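# The curves above show validation loss flattening out around 0.82 while
# training accuracy keeps climbing, which points to overfitting. One common
# remedy is data augmentation with the ImageDataGenerator imported earlier
# (currently unused). A minimal sketch with illustrative parameter values;
# X_tr/X_val/y_tr/y_val are new names for an explicit hold-out split, since
# fit() cannot combine validation_split with a generator. In practice the model
# would be rebuilt/re-initialised before retraining.
datagen = ImageDataGenerator(rotation_range=20,
                             width_shift_range=0.1,
                             height_shift_range=0.1,
                             zoom_range=0.1,
                             horizontal_flip=True)

X_tr, X_val, y_tr, y_val = train_test_split(
    X_train, y_train_one_hot.numpy(), test_size=0.3, random_state=10)

history = model.fit(datagen.flow(X_tr, y_tr, batch_size=batch_size),
                    epochs=epochs,
                    validation_data=(X_val, y_val),
                    callbacks=[reduce_lr, checkpoint])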
# Predict on the training set (so the confusion matrix below reflects training
# performance rather than generalisation)
predict_model = model.predict(X_train)
predict_model = np.argmax(predict_model, axis=1)
predict_model
array([24, 3, 1, ..., 5, 20, 5], dtype=int64)
import itertools
from sklearn.metrics import confusion_matrix

cm = confusion_matrix(y_true=y_train, y_pred=predict_model)
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=90)
    plt.yticks(tick_marks, classes)
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, cm[i, j],
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
cm_plot_labels = ['Bird\'s-foot Trefoil', 'Brown Knapweed', 'Buttercup', 'Chamomile', 'Common Dandelion', 'Common Poppy', 'Cornflower', 'Cow Parsley',
'Cuckooflower', 'Field Mouse-ear', 'Flatweed', 'Hares-foot clover', 'Health Spotted Orchid', 'Hoary Alyssum', 'Lesser Spearwort',
'Marsh Lousewort', 'Marsh marigold', 'Meadow Thistle', 'Ox-eye Daisy', 'Perforate St John\'s-wort', 'Purple Loosestrife','Ragwort', 'Red Clover', 'Redstem Filaree',
'Southern Marsh Orchid', 'Tansy', 'White Clover', 'Wild Carrot', 'Yarrow', 'Yellow Loosestrife']
fig, ax = plt.subplots(figsize=(12, 12))
plot_confusion_matrix(cm=cm, classes=cm_plot_labels, title='Confusion Matrix')
Confusion matrix, without normalization
[[160 0 0 1 7 0 0 0 0 0 3 0 0 0 0 0 4 0
0 0 0 0 0 0 0 0 0 1 0 1]
[ 0 168 0 0 0 0 0 2 1 0 0 0 3 0 0 6 0 2
0 0 5 1 1 0 0 1 0 1 0 1]
[ 3 0 165 0 1 0 0 0 0 0 11 0 0 0 1 0 6 0
0 2 0 0 0 0 0 0 0 0 0 8]
[ 2 1 0 143 0 0 0 2 1 1 3 0 0 1 0 0 0 0
2 0 0 0 0 0 0 0 0 2 1 1]
[ 2 0 0 0 168 0 0 0 0 0 3 0 0 0 0 0 1 0
0 0 0 1 0 0 0 0 0 0 0 1]
[ 0 0 0 0 0 187 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0]
[ 0 0 0 0 0 0 193 0 0 0 0 0 1 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0]
[ 0 0 0 0 2 0 0 170 3 0 0 0 2 2 0 0 0 0
0 0 0 3 0 0 0 0 0 2 2 1]
[ 0 0 0 0 0 0 0 1 161 0 0 0 3 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0]
[ 0 0 0 0 0 0 0 3 0 166 0 0 0 2 1 0 0 0
0 0 0 0 0 0 0 0 3 0 7 0]
[ 0 0 1 0 2 0 0 0 0 0 178 0 0 0 0 0 4 0
0 1 0 0 0 0 0 0 0 0 0 0]
[ 0 0 0 0 0 0 0 9 1 0 0 131 0 15 0 0 0 0
0 0 0 0 0 0 0 1 2 0 0 1]
[ 0 0 0 0 0 0 0 1 3 0 0 0 170 0 0 0 0 0
0 0 1 0 1 0 0 0 0 0 0 0]
[ 0 0 0 0 0 0 0 10 4 1 0 1 5 167 0 0 0 0
0 0 0 0 0 0 0 0 1 0 3 0]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 193 0 0 0
0 1 0 2 0 0 0 0 1 0 0 1]
[ 0 1 0 0 0 0 0 1 4 0 0 0 1 0 0 166 0 0
0 0 0 0 0 0 3 0 0 2 0 0]
[ 4 0 1 0 2 0 0 0 0 0 3 0 0 0 0 0 153 0
2 0 0 2 0 0 0 0 0 0 0 1]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 175
0 0 0 0 0 0 0 0 0 0 0 0]
[ 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0
178 0 0 2 0 0 0 2 0 0 0 0]
[ 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0
0 174 0 3 0 0 0 0 0 0 0 0]
[ 0 9 0 0 0 1 0 0 0 0 0 0 2 0 0 1 0 0
0 0 167 0 1 1 1 0 0 0 0 0]
[ 0 0 0 0 7 0 0 0 0 0 0 0 0 0 0 0 0 0
0 3 0 164 0 0 0 0 0 0 0 0]
[ 0 12 0 1 0 0 0 0 2 0 0 0 0 2 0 0 0 6
0 0 8 0 98 0 0 0 0 0 0 0]
[ 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 4 0 0 170 0 0 0 0 0 0]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 177 0 0 0 0 0]
[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0
0 0 0 1 0 0 0 172 0 0 0 13]
[ 0 4 0 0 1 0 0 3 0 4 0 0 7 2 1 0 0 1
1 0 0 0 0 0 0 0 149 0 0 0]
[ 0 0 0 0 0 0 0 4 3 0 0 0 1 0 0 5 0 0
0 0 0 0 0 0 0 0 0 173 0 0]
[ 0 0 0 0 0 0 0 2 1 0 0 0 4 0 0 1 0 0
0 0 0 0 0 0 0 0 0 0 166 0]
[ 1 1 0 0 4 0 0 1 1 0 0 0 0 0 2 0 0 0
0 2 0 8 0 0 0 0 0 1 0 155]]
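# The matrix above is computed on the training images, so it largely reflects
# what the network has memorised. A minimal sketch of the same analysis on the
# held-out test set, reusing plot_confusion_matrix and cm_plot_labels:
test_pred = np.argmax(model.predict(X_test), axis=1)
cm_test = confusion_matrix(y_true=y_test, y_pred=test_pred)
fig, ax = plt.subplots(figsize=(12, 12))
plot_confusion_matrix(cm=cm_test, classes=cm_plot_labels, title='Confusion Matrix (test set)')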
# Visualise a random sample of test images with their true and predicted class
# names (green = correct prediction, red = incorrect)
categories = np.sort(os.listdir(folder_dir))
test_probs = model.predict(X_test)  # predict once for the whole test set

fig, ax = plt.subplots(6, 6, figsize=(25, 40))
for i in range(6):
    for j in range(6):
        k = int(np.random.random_sample() * len(X_test))
        true_label = categories[np.argmax(y_test_one_hot[k])]
        pred_label = categories[np.argmax(test_probs[k])]
        color = 'green' if true_label == pred_label else 'red'
        ax[i, j].set_title("TRUE: " + true_label, color=color)
        ax[i, j].set_xlabel("PREDICTED: " + pred_label, color=color)
        ax[i, j].imshow(X_test[k])
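# To classify a single new photo, the image must go through the same
# preprocessing as the training data (BGR -> RGB, resize to 128x128, scale to
# [0, 1]); a minimal sketch, with 'my_flower.jpg' as a hypothetical file path:
new_img = cv2.imread('my_flower.jpg')
new_img = cv2.cvtColor(new_img, cv2.COLOR_BGR2RGB)
new_img = cv2.resize(new_img, (size, size)) / 255
probs = model.predict(np.expand_dims(new_img, axis=0))[0]
print("Predicted class:", categories[np.argmax(probs)])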